In [1]:
# import system libs
import os
import time
import random
import pathlib
import itertools
from glob import glob
from tqdm import tqdm_notebook, tnrange

# import data handling tools
import cv2
import numpy as np
import pandas as pd
import seaborn as sns
sns.set_style('darkgrid')
import matplotlib.pyplot as plt
%matplotlib inline
from skimage.color import rgb2gray
from skimage.morphology import label
from skimage.transform import resize
from sklearn.model_selection import train_test_split
from skimage.io import imread, imshow, concatenate_images

# import Deep learning Libraries
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import backend as K
from tensorflow.keras.models import Model, load_model, save_model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.optimizers import Adam, Adamax
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.layers import Input, Activation, BatchNormalization, Dropout, Lambda, Conv2D, Conv2DTranspose, MaxPooling2D, concatenate

# Ignore Warnings
import warnings
warnings.filterwarnings("ignore")

print ('modules loaded')
modules loaded
In [ ]:
 
In [ ]:
 

Create needed functions¶

Function to create dataframe from dataset¶

In [2]:
# function to create dataframe
def create_df(data_dir):
    """Pair every '*_Mask*' file under data_dir with its source image.

    Assumes each image path equals its mask path with the '_Mask' marker
    removed (the dataset's naming convention).

    Args:
        data_dir: root directory; masks are searched one level down
            (pattern ``{data_dir}/*/*_Mask*``).

    Returns:
        DataFrame with 'images_paths' and 'masks_paths' columns, one row
        per mask found.
    """
    masks_paths = glob(f'{data_dir}/*/*_Mask*')
    # Derive each image path by stripping the '_Mask' marker from its mask.
    images_paths = [path.replace('_Mask', '') for path in masks_paths]
    return pd.DataFrame(data={'images_paths': images_paths, 'masks_paths': masks_paths})

# Function to split dataframe into train, valid, test
def split_df(df, random_state=None):
    """Split a dataframe into train / valid / test partitions.

    The first split takes 50% for training; the remaining half is split
    again with train_size=0.2, so the final proportions of the original
    data are: train 50%, valid 10%, test 40%.

    Args:
        df: dataframe of image/mask paths to split.
        random_state: optional seed forwarded to both splits so the
            partition is reproducible across runs (default None preserves
            the original non-deterministic behavior).

    Returns:
        (train_df, valid_df, test_df) tuple of dataframes.
    """
    # create train_df (50% of the data)
    train_df, dummy_df = train_test_split(df, train_size=0.5, random_state=random_state)

    # create valid_df (20% of the remainder = 10% overall) and test_df (40% overall)
    valid_df, test_df = train_test_split(dummy_df, train_size=0.2, random_state=random_state)

    return train_df, valid_df, test_df

Function to create image generators and augmentation¶

In [ ]:
 
In [3]:
def create_gens(df, aug_dict, img_size=(256, 256), batch_size=2):
    """Yield aligned (image, mask) batches forever from a paths dataframe.

    Builds two ImageDataGenerator streams (one for images, one for masks)
    that share the same augmentation parameters and the same random seed,
    so each augmented image stays aligned with its augmented mask.

    Args:
        df: dataframe with 'images_paths' and 'masks_paths' columns.
        aug_dict: kwargs for ImageDataGenerator (empty dict = no augmentation).
        img_size: target (height, width) for resizing (default (256, 256)).
        batch_size: samples per yielded batch (default 2). NOTE: callers that
            compute steps_per_epoch must use the same value.

    Yields:
        (img, msk) tuples; images scaled to [0, 1], masks binarized to {0, 1}.
        The generator is infinite — bound it with steps_per_epoch.
    """
    img_gen = ImageDataGenerator(**aug_dict)
    msk_gen = ImageDataGenerator(**aug_dict)

    # Identical seeds keep the image and mask augmentation streams in sync.
    image_gen = img_gen.flow_from_dataframe(df, x_col='images_paths', class_mode=None, color_mode='rgb', target_size=img_size,
                                            batch_size=batch_size, save_to_dir=None, save_prefix='image', seed=1)

    mask_gen = msk_gen.flow_from_dataframe(df, x_col='masks_paths', class_mode=None, color_mode='grayscale', target_size=img_size,
                                            batch_size=batch_size, save_to_dir=None, save_prefix='Mask', seed=1)

    for (img, msk) in zip(image_gen, mask_gen):
        img = img / 255
        msk = msk / 255
        # Binarize the mask: interpolation during resize/augmentation
        # produces intermediate gray values.
        msk[msk > 0.5] = 1
        msk[msk <= 0.5] = 0

        yield (img, msk)

Function that has the U-Net structure¶

In [4]:
def unet(input_size=(256, 256, 3)):
    """Build the classic U-Net segmentation model.

    The encoder halves spatial resolution four times while doubling the
    filter count (64 -> 1024); the decoder mirrors it with transposed
    convolutions and skip connections that concatenate encoder feature maps
    with the decoder (helping gradient flow and spatial precision), ending
    with a 1x1 sigmoid convolution producing a single-channel mask.

    Args:
        input_size: input image shape (H, W, C), default (256, 256, 3).

    Returns:
        Uncompiled keras Model mapping inputs to a sigmoid mask of the same
        spatial size.
    """
    def _double_conv(x, filters):
        # conv -> relu -> conv -> BN -> relu. Returns both the raw second
        # conv output (used as the skip connection, matching the original
        # graph which skipped from the pre-BN conv tensor) and the
        # activated output.
        x = Conv2D(filters=filters, kernel_size=(3, 3), padding="same")(x)
        x = Activation("relu")(x)
        skip = Conv2D(filters=filters, kernel_size=(3, 3), padding="same")(x)
        out = Activation("relu")(BatchNormalization(axis=3)(skip))
        return skip, out

    def _up_block(x, skip, filters):
        # Upsample x2 with a transposed conv, concatenate the encoder skip,
        # then apply two regular convolutions so the model can learn to
        # assemble a more precise output.
        up = Conv2DTranspose(filters, kernel_size=(2, 2), strides=(2, 2), padding="same")(x)
        merged = concatenate([up, skip], axis=3)
        _, out = _double_conv(merged, filters)
        return out

    inputs = Input(input_size)

    # Encoder / downsampling leg: four stages of double-conv + max-pool.
    skip1, enc1 = _double_conv(inputs, 64)
    pool1 = MaxPooling2D(pool_size=(2, 2))(enc1)
    skip2, enc2 = _double_conv(pool1, 128)
    pool2 = MaxPooling2D(pool_size=(2, 2))(enc2)
    skip3, enc3 = _double_conv(pool2, 256)
    pool3 = MaxPooling2D(pool_size=(2, 2))(enc3)
    skip4, enc4 = _double_conv(pool3, 512)
    pool4 = MaxPooling2D(pool_size=(2, 2))(enc4)

    # Bottleneck at 1/16 resolution.
    _, bottleneck = _double_conv(pool4, 1024)

    # Decoder / upsampling leg: mirror the encoder with skip connections.
    dec6 = _up_block(bottleneck, skip4, 512)
    dec7 = _up_block(dec6, skip3, 256)
    dec8 = _up_block(dec7, skip2, 128)
    dec9 = _up_block(dec8, skip1, 64)

    # 1x1 conv collapses to a single-channel per-pixel probability.
    outputs = Conv2D(filters=1, kernel_size=(1, 1), activation="sigmoid")(dec9)

    return Model(inputs=[inputs], outputs=[outputs])

Functions for coefficients and loss¶

In [5]:
# function to create dice coefficient
def dice_coef(y_true, y_pred, smooth=100):
    """Soft Dice coefficient: 2*|A∩B| / (|A|+|B|), with additive smoothing.

    The smoothing term keeps the ratio defined (and the gradient stable)
    when both tensors are all zeros.
    """
    yt = K.flatten(y_true)
    yp = K.flatten(y_pred)

    overlap = K.sum(yt * yp)
    total = K.sum(yt) + K.sum(yp)
    return (2 * overlap + smooth) / (total + smooth)

# function to create dice loss
def dice_loss(y_true, y_pred, smooth=100):
    """Negated dice coefficient, so minimizing the loss maximizes dice."""
    coefficient = dice_coef(y_true, y_pred, smooth)
    return -coefficient

# function to create iou coefficient
# function to create iou coefficient
def iou_coef(y_true, y_pred, smooth=100):
    """Soft Intersection-over-Union: |A∩B| / |A∪B|, with additive smoothing.

    Fix: the accumulator was named `sum`, shadowing the Python builtin;
    renamed to `total` (no behavior change).
    """
    intersection = K.sum(y_true * y_pred)
    total = K.sum(y_true + y_pred)
    # Union = |A| + |B| - |A∩B|.
    iou = (intersection + smooth) / (total - intersection + smooth)
    return iou

Function to show image samples¶

In [6]:
def show_images(images, masks):
    """Display up to 25 image/mask pairs on a 5x5 grid, mask overlaid.

    Fix: the loop previously ran `range(55)`, which overruns the 5x5
    subplot grid (max 25 panels) and raises IndexError when fewer than
    55 paths are supplied; it is now clamped to min(len(images), 25).

    Args:
        images: list of image file paths.
        masks: list of mask file paths, index-aligned with `images`.
    """
    num_images = min(len(images), 25)  # a 5x5 grid holds at most 25 panels
    plt.figure(figsize=(12, 12))
    for i in range(num_images):
        plt.subplot(5, 5, i+1)
        img_path = images[i]
        mask_path = masks[i]
        # read image and convert it to RGB scale (OpenCV loads BGR)
        image = cv2.imread(img_path)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        # read mask
        mask = cv2.imread(mask_path)
        # show image with the mask as a semi-transparent overlay
        plt.imshow(image)
        plt.imshow(mask, alpha=0.4)

        plt.axis('off')

    plt.tight_layout()
    plt.show()

Function to display training history¶

In [7]:
def plot_training(hist):
    '''
    Plot training/validation accuracy, IoU, dice and loss curves from a
    keras History object, marking the best epoch on each panel.

    Fixes: the best-epoch indices for IoU and dice were computed with
    np.argmax on the metric *functions* (iou_coef / dice_coef) instead of
    the validation history arrays, so the marked epochs were wrong; and
    plt.tight_layout was referenced without being called.
    '''

    # Per-epoch metric histories.
    tr_acc = hist.history['accuracy']
    tr_iou = hist.history['iou_coef']
    tr_dice = hist.history['dice_coef']
    tr_loss = hist.history['loss']

    val_acc = hist.history['val_accuracy']
    val_iou = hist.history['val_iou_coef']
    val_dice = hist.history['val_dice_coef']
    val_loss = hist.history['val_loss']

    # Best epoch per metric: highest for scores, lowest for loss.
    index_acc = np.argmax(val_acc)
    acc_highest = val_acc[index_acc]
    index_iou = np.argmax(val_iou)      # was: np.argmax(iou_coef) — wrong object
    iou_highest = val_iou[index_iou]
    index_dice = np.argmax(val_dice)    # was: np.argmax(dice_coef) — wrong object
    dice_highest = val_dice[index_dice]
    index_loss = np.argmin(val_loss)
    val_lowest = val_loss[index_loss]

    Epochs = [i+1 for i in range(len(tr_acc))]

    acc_label = f'best epoch= {index_acc + 1}'
    iou_label = f'best epoch= {index_iou + 1}'
    dice_label = f'best epoch= {index_dice + 1}'
    loss_label = f'best epoch= {index_loss + 1}'

    # Plot training history as a 2x2 grid of panels.
    plt.figure(figsize= (20, 20))
    plt.style.use('fivethirtyeight')

    # Training Accuracy
    plt.subplot(2, 2, 1)
    plt.plot(Epochs, tr_acc, 'r', label= 'Training Accuracy')
    plt.plot(Epochs, val_acc, 'g', label= 'Validation Accuracy')
    plt.scatter(index_acc + 1 , acc_highest, s= 150, c= 'blue', label= acc_label)
    plt.title('Training and Validation Accuracy')
    plt.xlabel('Epochs')
    plt.ylabel('Accuracy')
    plt.legend()

    # Training IoU
    plt.subplot(2, 2, 2)
    plt.plot(Epochs, tr_iou, 'r', label= 'Training IoU')
    plt.plot(Epochs, val_iou, 'g', label= 'Validation IoU')
    plt.scatter(index_iou + 1 , iou_highest, s= 150, c= 'blue', label= iou_label)
    plt.title('Training and Validation IoU Coefficient')
    plt.xlabel('Epochs')
    plt.ylabel('IoU')
    plt.legend()

    # Training Dice
    plt.subplot(2, 2, 3)
    plt.plot(Epochs, tr_dice, 'r', label= 'Training Dice')
    plt.plot(Epochs, val_dice, 'g', label= 'Validation Dice')
    plt.scatter(index_dice + 1 , dice_highest, s= 150, c= 'blue', label= dice_label)
    plt.title('Training and Validation Dice Coefficient')
    plt.xlabel('Epochs')
    plt.ylabel('Dice')
    plt.legend()

    # Training Loss
    plt.subplot(2, 2, 4)
    plt.plot(Epochs, tr_loss, 'r', label= 'Training loss')
    plt.plot(Epochs, val_loss, 'g', label= 'Validation loss')
    plt.scatter(index_loss + 1, val_lowest, s= 150, c= 'blue', label= loss_label)
    plt.title('Training and Validation Loss')
    plt.xlabel('Epochs')
    plt.ylabel('Loss')
    plt.legend()

    plt.tight_layout()  # was missing the call parentheses (no-op before)
    plt.show()

Model Structure¶

Start reading data¶

In [8]:
# Dataset root on the local machine.
# NOTE(review): hardcoded absolute Windows path — consider a configurable
# DATA_DIR (e.g. pathlib.Path + environment variable) for portability.
data_dir= r"C:\Users\anesh\Downloads\data - Copy\dataset1\newfolder\LGG-PHO-Seg\Kaggle"
df = create_df(data_dir)
train_df, valid_df, test_df = split_df(df)


# Light geometric augmentation for the training generator only.
tr_aug_dict = dict(rotation_range=0.1,
                            width_shift_range=0.05,
                            height_shift_range=0.05,
                            shear_range=0.05,
                            zoom_range=0.05,
                            horizontal_flip=True,
                            fill_mode='nearest')


# Validation and test generators receive an empty aug dict (no augmentation).
# All three are infinite generators — they must be bounded by steps when used.
train_gen = create_gens(train_df, aug_dict=tr_aug_dict)
valid_gen = create_gens(valid_df, aug_dict={})
test_gen = create_gens(test_df, aug_dict={})
In [9]:
import matplotlib.pyplot as plt
import cv2

def show_images(images, masks):
    """Plot up to 25 image/mask pairs on a 5x5 grid, mask overlaid in 'jet'.

    NOTE(review): this redefines (shadows) the earlier show_images with a
    different overlay style — consider keeping only one definition.

    Args:
        images: list of image file paths.
        masks: list of mask file paths, index-aligned with `images`.
    """
    panel_count = min(len(images), 25)  # a 5x5 grid shows at most 25 pairs
    plt.figure(figsize=(10, 10))
    for idx in range(panel_count):
        plt.subplot(5, 5, idx + 1)

        # OpenCV loads BGR; convert for correct matplotlib colors.
        rgb = cv2.cvtColor(cv2.imread(images[idx]), cv2.COLOR_BGR2RGB)
        overlay = cv2.imread(masks[idx], cv2.IMREAD_GRAYSCALE)

        plt.imshow(rgb)
        plt.imshow(overlay, alpha=0.5, cmap='jet')  # semi-transparent mask overlay
        plt.axis('off')

    plt.tight_layout()
    plt.show()

# Preview training samples together with their segmentation masks.
show_images(list(train_df['images_paths']), list(train_df['masks_paths']))
No description has been provided for this image
In [ ]:
 
In [10]:
# Rebuild the paths dataframe and preview the first rows.
# A bare last expression uses the notebook's rich HTML display instead of
# print()'s plain-text dump.
df = create_df(data_dir)
df.head()
                                        images_paths  \
0  C:\Users\anesh\Downloads\data - Copy\dataset1\...   
1  C:\Users\anesh\Downloads\data - Copy\dataset1\...   
2  C:\Users\anesh\Downloads\data - Copy\dataset1\...   
3  C:\Users\anesh\Downloads\data - Copy\dataset1\...   
4  C:\Users\anesh\Downloads\data - Copy\dataset1\...   

                                         masks_paths  
0  C:\Users\anesh\Downloads\data - Copy\dataset1\...  
1  C:\Users\anesh\Downloads\data - Copy\dataset1\...  
2  C:\Users\anesh\Downloads\data - Copy\dataset1\...  
3  C:\Users\anesh\Downloads\data - Copy\dataset1\...  
4  C:\Users\anesh\Downloads\data - Copy\dataset1\...  

Unet Model¶

In [11]:
# Build the U-Net and compile it with the Adamax optimizer, dice loss, and
# accuracy / IoU / dice metrics for monitoring.
model = unet()
optimizer = Adamax(learning_rate=0.001)
model.compile(optimizer, loss=dice_loss, metrics=['accuracy', iou_coef, dice_coef])

model.summary()
Model: "functional"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
┃ Layer (type)                  ┃ Output Shape              ┃         Param # ┃ Connected to               ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
│ input_layer (InputLayer)      │ (None, 256, 256, 3)       │               0 │ -                          │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d (Conv2D)               │ (None, 256, 256, 64)      │           1,792 │ input_layer[0][0]          │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation (Activation)       │ (None, 256, 256, 64)      │               0 │ conv2d[0][0]               │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_1 (Conv2D)             │ (None, 256, 256, 64)      │          36,928 │ activation[0][0]           │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ batch_normalization           │ (None, 256, 256, 64)      │             256 │ conv2d_1[0][0]             │
│ (BatchNormalization)          │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_1 (Activation)     │ (None, 256, 256, 64)      │               0 │ batch_normalization[0][0]  │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ max_pooling2d (MaxPooling2D)  │ (None, 128, 128, 64)      │               0 │ activation_1[0][0]         │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_2 (Conv2D)             │ (None, 128, 128, 128)     │          73,856 │ max_pooling2d[0][0]        │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_2 (Activation)     │ (None, 128, 128, 128)     │               0 │ conv2d_2[0][0]             │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_3 (Conv2D)             │ (None, 128, 128, 128)     │         147,584 │ activation_2[0][0]         │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ batch_normalization_1         │ (None, 128, 128, 128)     │             512 │ conv2d_3[0][0]             │
│ (BatchNormalization)          │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_3 (Activation)     │ (None, 128, 128, 128)     │               0 │ batch_normalization_1[0][… │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ max_pooling2d_1               │ (None, 64, 64, 128)       │               0 │ activation_3[0][0]         │
│ (MaxPooling2D)                │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_4 (Conv2D)             │ (None, 64, 64, 256)       │         295,168 │ max_pooling2d_1[0][0]      │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_4 (Activation)     │ (None, 64, 64, 256)       │               0 │ conv2d_4[0][0]             │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_5 (Conv2D)             │ (None, 64, 64, 256)       │         590,080 │ activation_4[0][0]         │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ batch_normalization_2         │ (None, 64, 64, 256)       │           1,024 │ conv2d_5[0][0]             │
│ (BatchNormalization)          │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_5 (Activation)     │ (None, 64, 64, 256)       │               0 │ batch_normalization_2[0][… │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ max_pooling2d_2               │ (None, 32, 32, 256)       │               0 │ activation_5[0][0]         │
│ (MaxPooling2D)                │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_6 (Conv2D)             │ (None, 32, 32, 512)       │       1,180,160 │ max_pooling2d_2[0][0]      │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_6 (Activation)     │ (None, 32, 32, 512)       │               0 │ conv2d_6[0][0]             │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_7 (Conv2D)             │ (None, 32, 32, 512)       │       2,359,808 │ activation_6[0][0]         │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ batch_normalization_3         │ (None, 32, 32, 512)       │           2,048 │ conv2d_7[0][0]             │
│ (BatchNormalization)          │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_7 (Activation)     │ (None, 32, 32, 512)       │               0 │ batch_normalization_3[0][… │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ max_pooling2d_3               │ (None, 16, 16, 512)       │               0 │ activation_7[0][0]         │
│ (MaxPooling2D)                │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_8 (Conv2D)             │ (None, 16, 16, 1024)      │       4,719,616 │ max_pooling2d_3[0][0]      │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_8 (Activation)     │ (None, 16, 16, 1024)      │               0 │ conv2d_8[0][0]             │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_9 (Conv2D)             │ (None, 16, 16, 1024)      │       9,438,208 │ activation_8[0][0]         │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ batch_normalization_4         │ (None, 16, 16, 1024)      │           4,096 │ conv2d_9[0][0]             │
│ (BatchNormalization)          │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_9 (Activation)     │ (None, 16, 16, 1024)      │               0 │ batch_normalization_4[0][… │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_transpose              │ (None, 32, 32, 512)       │       2,097,664 │ activation_9[0][0]         │
│ (Conv2DTranspose)             │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ concatenate (Concatenate)     │ (None, 32, 32, 1024)      │               0 │ conv2d_transpose[0][0],    │
│                               │                           │                 │ conv2d_7[0][0]             │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_10 (Conv2D)            │ (None, 32, 32, 512)       │       4,719,104 │ concatenate[0][0]          │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_10 (Activation)    │ (None, 32, 32, 512)       │               0 │ conv2d_10[0][0]            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_11 (Conv2D)            │ (None, 32, 32, 512)       │       2,359,808 │ activation_10[0][0]        │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ batch_normalization_5         │ (None, 32, 32, 512)       │           2,048 │ conv2d_11[0][0]            │
│ (BatchNormalization)          │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_11 (Activation)    │ (None, 32, 32, 512)       │               0 │ batch_normalization_5[0][… │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_transpose_1            │ (None, 64, 64, 256)       │         524,544 │ activation_11[0][0]        │
│ (Conv2DTranspose)             │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ concatenate_1 (Concatenate)   │ (None, 64, 64, 512)       │               0 │ conv2d_transpose_1[0][0],  │
│                               │                           │                 │ conv2d_5[0][0]             │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_12 (Conv2D)            │ (None, 64, 64, 256)       │       1,179,904 │ concatenate_1[0][0]        │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_12 (Activation)    │ (None, 64, 64, 256)       │               0 │ conv2d_12[0][0]            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_13 (Conv2D)            │ (None, 64, 64, 256)       │         590,080 │ activation_12[0][0]        │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ batch_normalization_6         │ (None, 64, 64, 256)       │           1,024 │ conv2d_13[0][0]            │
│ (BatchNormalization)          │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_13 (Activation)    │ (None, 64, 64, 256)       │               0 │ batch_normalization_6[0][… │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_transpose_2            │ (None, 128, 128, 128)     │         131,200 │ activation_13[0][0]        │
│ (Conv2DTranspose)             │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ concatenate_2 (Concatenate)   │ (None, 128, 128, 256)     │               0 │ conv2d_transpose_2[0][0],  │
│                               │                           │                 │ conv2d_3[0][0]             │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_14 (Conv2D)            │ (None, 128, 128, 128)     │         295,040 │ concatenate_2[0][0]        │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_14 (Activation)    │ (None, 128, 128, 128)     │               0 │ conv2d_14[0][0]            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_15 (Conv2D)            │ (None, 128, 128, 128)     │         147,584 │ activation_14[0][0]        │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ batch_normalization_7         │ (None, 128, 128, 128)     │             512 │ conv2d_15[0][0]            │
│ (BatchNormalization)          │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_15 (Activation)    │ (None, 128, 128, 128)     │               0 │ batch_normalization_7[0][… │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_transpose_3            │ (None, 256, 256, 64)      │          32,832 │ activation_15[0][0]        │
│ (Conv2DTranspose)             │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ concatenate_3 (Concatenate)   │ (None, 256, 256, 128)     │               0 │ conv2d_transpose_3[0][0],  │
│                               │                           │                 │ conv2d_1[0][0]             │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_16 (Conv2D)            │ (None, 256, 256, 64)      │          73,792 │ concatenate_3[0][0]        │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_16 (Activation)    │ (None, 256, 256, 64)      │               0 │ conv2d_16[0][0]            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_17 (Conv2D)            │ (None, 256, 256, 64)      │          36,928 │ activation_16[0][0]        │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ batch_normalization_8         │ (None, 256, 256, 64)      │             256 │ conv2d_17[0][0]            │
│ (BatchNormalization)          │                           │                 │                            │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ activation_17 (Activation)    │ (None, 256, 256, 64)      │               0 │ batch_normalization_8[0][… │
├───────────────────────────────┼───────────────────────────┼─────────────────┼────────────────────────────┤
│ conv2d_18 (Conv2D)            │ (None, 256, 256, 1)       │              65 │ activation_17[0][0]        │
└───────────────────────────────┴───────────────────────────┴─────────────────┴────────────────────────────┘
 Total params: 31,043,521 (118.42 MB)
 Trainable params: 31,037,633 (118.40 MB)
 Non-trainable params: 5,888 (23.00 KB)

Model training¶

In [12]:
# Training configuration.
epochs = 30
batch_size = 2  # NOTE(review): must match the batch_size used inside create_gens
# Persist only the best model (lowest validation loss by default).
callbacks = [ModelCheckpoint('unet.keras', verbose=0, save_best_only=True)]

# The generators are infinite, so steps_per_epoch / validation_steps bound
# each epoch; int() floors, dropping any trailing partial batch.
history = model.fit(train_gen,
                    steps_per_epoch=int(len(train_df) / batch_size),
                    epochs=epochs,
                    verbose=1,
                    callbacks=callbacks,
                    validation_data=valid_gen,
                    validation_steps=int(len(valid_df) / batch_size))
Found 46 validated image filenames.
Found 46 validated image filenames.
Epoch 1/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 0s 2s/step - accuracy: 0.5850 - dice_coef: 0.5935 - iou_coef: 0.4369 - loss: -0.5935Found 9 validated image filenames.
Found 9 validated image filenames.
23/23 ━━━━━━━━━━━━━━━━━━━━ 80s 3s/step - accuracy: 0.5869 - dice_coef: 0.5956 - iou_coef: 0.4392 - loss: -0.5956 - val_accuracy: 0.5460 - val_dice_coef: 0.4756 - val_iou_coef: 0.3181 - val_loss: -0.4756
Epoch 2/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 70s 3s/step - accuracy: 0.6645 - dice_coef: 0.6695 - iou_coef: 0.5125 - loss: -0.6695 - val_accuracy: 0.5109 - val_dice_coef: 0.5593 - val_iou_coef: 0.4170 - val_loss: -0.6033
Epoch 3/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 73s 3s/step - accuracy: 0.6999 - dice_coef: 0.6958 - iou_coef: 0.5489 - loss: -0.6958 - val_accuracy: 0.4246 - val_dice_coef: 0.6050 - val_iou_coef: 0.4441 - val_loss: -0.5862
Epoch 4/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 116s 5s/step - accuracy: 0.7904 - dice_coef: 0.7758 - iou_coef: 0.6370 - loss: -0.7758 - val_accuracy: 0.4274 - val_dice_coef: 0.6006 - val_iou_coef: 0.4431 - val_loss: -0.5881
Epoch 5/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 82s 3s/step - accuracy: 0.8573 - dice_coef: 0.8144 - iou_coef: 0.6915 - loss: -0.8144 - val_accuracy: 0.4801 - val_dice_coef: 0.5890 - val_iou_coef: 0.4423 - val_loss: -0.6363
Epoch 6/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 66s 3s/step - accuracy: 0.8358 - dice_coef: 0.8145 - iou_coef: 0.6937 - loss: -0.8145 - val_accuracy: 0.4364 - val_dice_coef: 0.6150 - val_iou_coef: 0.4480 - val_loss: -0.6150
Epoch 7/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 84s 4s/step - accuracy: 0.8441 - dice_coef: 0.8113 - iou_coef: 0.6896 - loss: -0.8113 - val_accuracy: 0.5064 - val_dice_coef: 0.6867 - val_iou_coef: 0.5310 - val_loss: -0.6784
Epoch 8/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 66s 3s/step - accuracy: 0.8686 - dice_coef: 0.8409 - iou_coef: 0.7288 - loss: -0.8409 - val_accuracy: 0.4718 - val_dice_coef: 0.5959 - val_iou_coef: 0.4450 - val_loss: -0.6397
Epoch 9/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 75s 3s/step - accuracy: 0.8589 - dice_coef: 0.8252 - iou_coef: 0.7114 - loss: -0.8252 - val_accuracy: 0.4269 - val_dice_coef: 0.5772 - val_iou_coef: 0.4151 - val_loss: -0.6039
Epoch 10/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 69s 3s/step - accuracy: 0.8689 - dice_coef: 0.8487 - iou_coef: 0.7418 - loss: -0.8487 - val_accuracy: 0.4633 - val_dice_coef: 0.5798 - val_iou_coef: 0.4285 - val_loss: -0.6200
Epoch 11/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 67s 3s/step - accuracy: 0.8658 - dice_coef: 0.8421 - iou_coef: 0.7338 - loss: -0.8421 - val_accuracy: 0.5060 - val_dice_coef: 0.6507 - val_iou_coef: 0.4867 - val_loss: -0.6507
Epoch 12/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 82s 4s/step - accuracy: 0.8924 - dice_coef: 0.8725 - iou_coef: 0.7760 - loss: -0.8725 - val_accuracy: 0.5860 - val_dice_coef: 0.6983 - val_iou_coef: 0.5385 - val_loss: -0.6985
Epoch 13/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 70s 3s/step - accuracy: 0.8805 - dice_coef: 0.8495 - iou_coef: 0.7429 - loss: -0.8495 - val_accuracy: 0.6630 - val_dice_coef: 0.5980 - val_iou_coef: 0.4420 - val_loss: -0.6333
Epoch 14/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 77s 3s/step - accuracy: 0.8804 - dice_coef: 0.8316 - iou_coef: 0.7203 - loss: -0.8316 - val_accuracy: 0.5877 - val_dice_coef: 0.6425 - val_iou_coef: 0.4910 - val_loss: -0.6808
Epoch 15/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 83s 4s/step - accuracy: 0.8884 - dice_coef: 0.8678 - iou_coef: 0.7689 - loss: -0.8678 - val_accuracy: 0.6367 - val_dice_coef: 0.6207 - val_iou_coef: 0.4641 - val_loss: -0.6551
Epoch 16/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 80s 3s/step - accuracy: 0.8676 - dice_coef: 0.8380 - iou_coef: 0.7245 - loss: -0.8380 - val_accuracy: 0.7236 - val_dice_coef: 0.7611 - val_iou_coef: 0.6222 - val_loss: -0.7611
Epoch 17/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 75s 3s/step - accuracy: 0.8997 - dice_coef: 0.8747 - iou_coef: 0.7803 - loss: -0.8747 - val_accuracy: 0.6776 - val_dice_coef: 0.7296 - val_iou_coef: 0.5896 - val_loss: -0.7057
Epoch 18/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 77s 3s/step - accuracy: 0.8744 - dice_coef: 0.8549 - iou_coef: 0.7499 - loss: -0.8549 - val_accuracy: 0.8029 - val_dice_coef: 0.8185 - val_iou_coef: 0.7074 - val_loss: -0.8124
Epoch 19/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 76s 3s/step - accuracy: 0.8874 - dice_coef: 0.8581 - iou_coef: 0.7551 - loss: -0.8581 - val_accuracy: 0.7313 - val_dice_coef: 0.7859 - val_iou_coef: 0.6478 - val_loss: -0.7889
Epoch 20/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 76s 3s/step - accuracy: 0.8878 - dice_coef: 0.8646 - iou_coef: 0.7663 - loss: -0.8646 - val_accuracy: 0.6711 - val_dice_coef: 0.7053 - val_iou_coef: 0.5557 - val_loss: -0.6934
Epoch 21/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 75s 3s/step - accuracy: 0.8935 - dice_coef: 0.8605 - iou_coef: 0.7616 - loss: -0.8605 - val_accuracy: 0.7304 - val_dice_coef: 0.7627 - val_iou_coef: 0.6181 - val_loss: -0.7627
Epoch 22/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 74s 3s/step - accuracy: 0.8543 - dice_coef: 0.8352 - iou_coef: 0.7205 - loss: -0.8352 - val_accuracy: 0.7544 - val_dice_coef: 0.7777 - val_iou_coef: 0.6510 - val_loss: -0.7568
Epoch 23/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 76s 3s/step - accuracy: 0.8910 - dice_coef: 0.8659 - iou_coef: 0.7678 - loss: -0.8659 - val_accuracy: 0.7226 - val_dice_coef: 0.7861 - val_iou_coef: 0.6527 - val_loss: -0.7714
Epoch 24/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 78s 3s/step - accuracy: 0.8766 - dice_coef: 0.8650 - iou_coef: 0.7658 - loss: -0.8650 - val_accuracy: 0.8505 - val_dice_coef: 0.8598 - val_iou_coef: 0.7552 - val_loss: -0.8566
Epoch 25/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 76s 3s/step - accuracy: 0.9013 - dice_coef: 0.8833 - iou_coef: 0.7928 - loss: -0.8833 - val_accuracy: 0.8639 - val_dice_coef: 0.8540 - val_iou_coef: 0.7495 - val_loss: -0.8484
Epoch 26/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 79s 3s/step - accuracy: 0.8863 - dice_coef: 0.8375 - iou_coef: 0.7271 - loss: -0.8375 - val_accuracy: 0.7994 - val_dice_coef: 0.7995 - val_iou_coef: 0.6790 - val_loss: -0.7995
Epoch 27/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 76s 3s/step - accuracy: 0.8697 - dice_coef: 0.8543 - iou_coef: 0.7495 - loss: -0.8543 - val_accuracy: 0.8483 - val_dice_coef: 0.8464 - val_iou_coef: 0.7369 - val_loss: -0.8360
Epoch 28/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 81s 4s/step - accuracy: 0.8931 - dice_coef: 0.8770 - iou_coef: 0.7827 - loss: -0.8770 - val_accuracy: 0.8036 - val_dice_coef: 0.8017 - val_iou_coef: 0.6752 - val_loss: -0.7941
Epoch 29/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 84s 4s/step - accuracy: 0.8869 - dice_coef: 0.8651 - iou_coef: 0.7654 - loss: -0.8651 - val_accuracy: 0.8602 - val_dice_coef: 0.8733 - val_iou_coef: 0.7756 - val_loss: -0.8702
Epoch 30/30
23/23 ━━━━━━━━━━━━━━━━━━━━ 76s 3s/step - accuracy: 0.8809 - dice_coef: 0.8599 - iou_coef: 0.7574 - loss: -0.8599 - val_accuracy: 0.8989 - val_dice_coef: 0.8550 - val_iou_coef: 0.7530 - val_loss: -0.8696
In [13]:
# Plot the loss/metric curves recorded in the History object returned by model.fit
plot_training(history)
No description has been provided for this image

Model Evaluation¶

In [14]:
# Evaluate the trained model on all three splits and report every compiled metric.
# Batch size: the largest divisor of the test-set size that keeps batches <= 80,
# so the generator covers the split exactly with no partial batch.
ts_length = len(test_df)
candidate_sizes = [ts_length // n for n in range(1, ts_length + 1)
                   if ts_length % n == 0 and ts_length / n <= 80]
test_batch_size = max(candidate_sizes)
test_steps = ts_length // test_batch_size

train_score = model.evaluate(train_gen, steps= test_steps, verbose= 1)
valid_score = model.evaluate(valid_gen, steps= test_steps, verbose= 1)
test_score = model.evaluate(test_gen, steps= test_steps, verbose= 1)

# Index order follows model.compile: [loss, accuracy, iou_coef, dice_coef]
# (matches the per-split values printed in the training log above).
for split_name, score in (('Train', train_score), ('Valid', valid_score), ('Test', test_score)):
    print(split_name + " Loss: ", score[0])
    print(split_name + " Accuracy: ", score[1])
    print(split_name + " IoU: ", score[2])
    print(split_name + " Dice: ", score[3])
    if split_name != 'Test':
        print('-' * 20)
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 1s/step - accuracy: 0.9141 - dice_coef: 0.9004 - iou_coef: 0.8189 - loss: -0.9004
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 1s/step - accuracy: 0.8707 - dice_coef: 0.8755 - iou_coef: 0.7787 - loss: -0.8755
Found 37 validated image filenames.
Found 37 validated image filenames.
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 1s/step - accuracy: 0.9040 - dice_coef: 0.8556 - iou_coef: 0.7479 - loss: -0.8556
Train Loss:  -0.9003714919090271
Train Accuracy:  0.91412353515625
Train IoU:  0.818939208984375
Train Dice:  0.9003714919090271
--------------------
Valid Loss:  -0.8755173683166504
Valid Accuracy:  0.8707275390625
Valid IoU:  0.7787383794784546
Valid Dice:  0.8755173683166504
--------------------
Test Loss:  -0.85563725233078
Test Accuracy:  0.9040069580078125
Test IoU:  0.747946560382843
Test Dice:  0.85563725233078

Prediction¶

In [15]:
# Sanity-check the test split: show the first few image paths it will predict on
print(test_df['images_paths'].head())
84    C:\Users\anesh\Downloads\data - Copy\dataset1\...
75    C:\Users\anesh\Downloads\data - Copy\dataset1\...
7     C:\Users\anesh\Downloads\data - Copy\dataset1\...
11    C:\Users\anesh\Downloads\data - Copy\dataset1\...
34    C:\Users\anesh\Downloads\data - Copy\dataset1\...
Name: images_paths, dtype: object
In [16]:
# Visualize predictions on 50 randomly chosen test samples:
# original image | ground-truth mask | thresholded model prediction.
for _ in range(50):
    # randint's upper bound is exclusive; start at 0 so the first test row
    # can also be sampled (the original `randint(1, ...)` skipped index 0).
    index = np.random.randint(0, len(test_df.index))

    img = cv2.imread(test_df['images_paths'].iloc[index])
    # cv2.imread returns BGR; convert to RGB so the display colors are correct
    # and the input matches the RGB ordering of the training generator
    # (ImageDataGenerator loads images as RGB) — confirm against create_gens.
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    img = cv2.resize(img, (256, 256))
    img = img / 255            # scale to [0, 1], same as the training rescale
    img = img[np.newaxis, :, :, :]   # add batch axis -> (1, 256, 256, 3)

    predicted_img = model.predict(img)

    mask = cv2.imread(test_df['masks_paths'].iloc[index])
    mask = cv2.cvtColor(mask, cv2.COLOR_BGR2RGB)

    fig = plt.figure(figsize=(12, 12))

    plt.subplot(1, 3, 1)
    plt.imshow(np.squeeze(img))
    plt.axis('off')
    plt.title('Original Image')

    plt.subplot(1, 3, 2)
    plt.imshow(np.squeeze(mask))
    plt.axis('off')
    plt.title('Original Mask')

    plt.subplot(1, 3, 3)
    # binarize the (presumably sigmoid) output at 0.5 for a clean mask view
    plt.imshow(np.squeeze(predicted_img) > 0.5)
    plt.title('Prediction')
    plt.axis('off')

    plt.show()
    # Close each figure explicitly: 50 open figures in one loop otherwise
    # accumulate in memory for the lifetime of the kernel.
    plt.close(fig)
1/1 ━━━━━━━━━━━━━━━━━━━━ 2s 2s/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 677ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 721ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 618ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 590ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 644ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 613ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 747ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 623ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 687ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 626ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 632ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 576ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 550ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 571ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 614ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 613ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 676ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 623ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 597ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 622ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 579ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 592ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 580ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 628ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 680ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 640ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 643ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 585ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 565ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 572ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 670ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 574ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 571ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 561ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 653ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 647ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 570ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 617ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 582ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 580ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 570ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 569ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 581ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 582ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 612ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 623ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 570ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 586ms/step
No description has been provided for this image
1/1 ━━━━━━━━━━━━━━━━━━━━ 1s 554ms/step
No description has been provided for this image
In [ ]:
 
In [ ]:
 
In [ ]:
 
In [ ]:
 
In [ ]: